## Loading required package: knitr

require(ggplot2)
require(lattice) # nicer scatter plots
require(plyr)
require(grid) # contains the arrow function
require(biOps)
require(doMC) # for parallel code
require(EBImage)
require(reshape2) # for the melt function
## To install EBImage
# source("http://bioconductor.org/biocLite.R")
# biocLite("EBImage")
require(png)
require(gridExtra)

# start parallel environment
registerDoMC()
# functions for converting images back and forth
# Convert a matrix image into a long-format data frame.
#
# in.img: a matrix of pixel values; rows map to x, columns to y.
# Returns a data frame with columns x, y and val, one row per pixel.
# expand.grid varies x fastest, which matches as.vector's column-major
# flattening of the matrix, so val lines up with (x, y) correctly.
im.to.df<-function(in.img) {
  # seq_len is safe for zero-row/zero-column input (1:0 would yield c(1, 0))
  out.im<-expand.grid(x=seq_len(nrow(in.img)),y=seq_len(ncol(in.img)))
  out.im$val<-as.vector(in.img)
  out.im
  }
# Convert a long-format image data frame back into a matrix image.
#
# in.df:   data frame with x, y and value columns (as made by im.to.df)
# val.col: name of the column holding the pixel values
# inv:     if TRUE, invert intensities (255 - value)
# Returns a matrix carrying a "type" attribute of "grey".
df.to.im<-function(in.df,val.col="val",inv=FALSE) {
  in.vals<-in.df[[val.col]]
  # logical masks are promoted to 0/255 grey values
  if(is.logical(in.vals)) in.vals<-as.integer(in.vals*255)
  if(inv) in.vals<-255-in.vals
  # distinct x positions give the row count; byrow=FALSE matches the
  # column-major ordering produced by im.to.df
  out.mat<-matrix(in.vals,nrow=length(unique(in.df$x)),byrow=FALSE)
  attr(out.mat,"type")<-"grey"
  out.mat
  }
# Run ddply and then clean up grouping columns produced by cut()-style calls.
#
# ddply on a cut()/cut_interval() grouping yields factor labels such as
# "(a,b]" and column names such as "cut_interval(sample, 10)".  This wrapper
# replaces each of the first `cols` columns with the numeric midpoint of its
# interval and strips the cut call wrapper from the column name.
#
# ...:  arguments passed straight through to plyr::ddply
# cols: number of leading columns that came from cut()-style groupings
ddply.cutcols<-function(...,cols=1) {
  # run standard ddply command
  cur.table<-ddply(...)
  # "(a,b]" -> mean(a, b): numeric midpoint of the interval label
  cutlabel.fixer<-function(oVal) {
    sapply(oVal,function(x) {
      cnv<-as.character(x)
      mean(as.numeric(strsplit(substr(cnv,2,nchar(cnv)-1),",")[[1]]))
      })
    }
  # "cut_interval(sample, 10)" -> "sample": drop the call wrapper and the
  # trailing comma-separated cut arguments
  cutname.fixer<-function(c.str) {
    s.str<-strsplit(c.str,"(",fixed=TRUE)[[1]]
    t.str<-strsplit(paste(s.str[c(2:length(s.str))],collapse="("),",")[[1]]
    # keep everything before the last comma-separated piece (seq_len replaces
    # the original `c(1:length(t.str)-1)`, which relied on `:` precedence)
    paste(t.str[seq_len(length(t.str)-1)],collapse=",")
    }
  for(i in seq_len(cols)) {
    cur.table[,i]<-cutlabel.fixer(cur.table[,i])
    names(cur.table)[i]<-cutname.fixer(names(cur.table)[i])
    }
  cur.table
  }


# Lay out a list of PNG files as a grid of labelled ggplot panels.
#
# file.list: list/vector of PNG file paths
# title.fun: function mapping a file path to its panel title
# zoom:      raster width in npc units (1 = fill the panel)
# Draws the assembled grid via grid.arrange as a side effect.
show.pngs.as.grid<-function(file.list,title.fun,zoom=1) {
  # read a PNG into a rasterGrob (interp=F keeps pixels sharp)
  preparePng<-function(x) rasterGrob(readPNG(x,native=T,info=T),width=unit(zoom,"npc"),interp=F)
  # wrap the raster in a blank square ggplot so it can carry a title/theme
  labelPng<-function(x,title="junk") (qplot(1:300, 1:300, geom="blank",xlab=NULL,ylab=NULL,asp=1)+
                                        annotation_custom(preparePng(x))+
                                        labs(title=title)+theme_bw(24)+
                                        theme(axis.text.x = element_blank(),
                                             axis.text.y = element_blank()))
  imgList<-llply(file.list,function(x) labelPng(x,title.fun(x)) )
  do.call(grid.arrange,imgList)
}

## Standard image processing tools which I use for visualizing the examples in the script
# For every labelled object (grouped by val), compute both the plain
# centre of mass (xv, yv) and the weight-weighted centre (xm, ym) from
# the per-pixel weight column.
commean.fun<-function(in.df) {
  ddply(in.df,.(val), function(obj.px) {
    total.weight<-sum(obj.px$weight)
    with(obj.px,
         data.frame(xv=mean(x),
                    yv=mean(y),
                    xm=sum(x*weight)/total.weight,
                    ym=sum(y*weight)/total.weight))
    })
  }

# Column means of x, returned as a one-row data frame (names preserved).
colMeans.df<-function(x,...) {
  mean.vec<-colMeans(x,...)
  as.data.frame(t(mean.vec))
}

# Shape-tensor (PCA) analysis of each labelled object in a pixel table.
#
# in.df needs columns x, y and val (the object label).  For each object the
# 2x2 covariance matrix of its pixel coordinates is eigendecomposed.
# Returns, per object, one row per eigenvector holding the object centre
# (x, y), the eigenvector components (vx, vy), the axis scale
# vw = sqrt(eigenvalue), and th.off, the eigenvector's angle.
pca.fun<-function(in.df) {
  ddply(in.df,.(val), function(c.cell) {
    c.cell.cov<-cov(c.cell[,c("x","y")])
    c.cell.eigen<-eigen(c.cell.cov)
    
    # object centre as a one-row data frame so it can be cbind-ed below
    c.cell.mean<-colMeans.df(c.cell[,c("x","y")])
    out.df<-cbind(c.cell.mean,
                  data.frame(vx=c.cell.eigen$vectors[1,],
                             vy=c.cell.eigen$vectors[2,],
                             vw=sqrt(c.cell.eigen$values),
                             th.off=atan2(c.cell.eigen$vectors[2,],c.cell.eigen$vectors[1,]))
                  )
    })
  }
# Convert pca.fun output into drawable ellipse outlines (one per object).
#
# pca.df: output of pca.fun -- two rows per val, one per eigenvector.
# Both axes are scaled by sqrt(5) so the ellipse approximates the object
# extent; the orientation comes from the first eigenvector.
vec.to.ellipse<-function(pca.df) {
  ddply(pca.df,.(val),function(cur.pca) {
    # assume there are two vectors now
    create.ellipse.points(x.off=cur.pca[1,"x"],y.off=cur.pca[1,"y"],
                          b=sqrt(5)*cur.pca[1,"vw"],a=sqrt(5)*cur.pca[2,"vw"],
                          th.off=pi/2-atan2(cur.pca[1,"vy"],cur.pca[1,"vx"]),
                          x.cent=cur.pca[1,"x"],y.cent=cur.pca[1,"y"])
    })
  }

# test function for ellipse generation
# ggplot(ldply(seq(-pi,pi,length.out=100),function(th) create.ellipse.points(a=1,b=2,th.off=th,th.val=th)),aes(x=x,y=y))+geom_path()+facet_wrap(~th.val)+coord_equal()
# Sample points along a (possibly rotated) ellipse outline.
#
# x.off, y.off: centre of the ellipse
# a, b:         semi-axes (b defaults to a, giving a circle)
# th.off:       rotation angle of the ellipse
# th.max, pts:  parameter range and number of sample points
# ...:          extra constant columns attached to the output
# Returns a data frame of x/y outline points plus an id factor uniquely
# tagging this ellipse's parameters.
create.ellipse.points<-function(x.off=0,y.off=0,a=1,b=NULL,th.off=0,th.max=2*pi,pts=36,...) {
  if (is.null(b)) {
    b<-a  # circle when only one semi-axis is supplied
  }
  theta<-seq(0,th.max,length.out=pts)
  # the rotation terms are constant along the outline, so hoist them
  c.off<-cos(th.off)
  s.off<-sin(th.off)
  ellipse.id<-as.factor(paste(x.off,y.off,a,b,th.off,pts,sep=":"))
  data.frame(x=a*c.off*cos(theta)+b*s.off*sin(theta)+x.off,
             y=-1*a*s.off*cos(theta)+b*c.off*sin(theta)+y.off,
             id=ellipse.id,...)
  }
# Draw helper for a deformed-box record: take the first entry of each of
# c.box's x/y (centre), a/b (axes), th (angle) and col (colour) fields and
# expand them into ellipse outline points.
deform.ellipse.draw<-function(c.box) {
  with(c.box,
       create.ellipse.points(x.off=x[1],
                             y.off=y[1],
                             a=a[1],
                             b=b[1],
                             th.off=th[1],
                             col=col[1]))
  }
# Bounding box of every labelled object (grouped by val).
# Emits five corner points per object (a closed loop, for geom_path) in
# xi/yi plus the box widths xw/yw; x/y carry the object centre.
bbox.fun<-function(in.df) {
  ddply(in.df,.(val), function(obj.px) {
    centre<-colMeans.df(obj.px[,c("x","y")])
    x.lo<-emin(obj.px$x)
    x.hi<-emax(obj.px$x)
    y.lo<-emin(obj.px$y)
    y.hi<-emax(obj.px$y)
    # the corner order traces the rectangle and returns to the start
    corners<-data.frame(xi=c(x.lo,x.lo,x.hi,x.hi,x.lo),
                        yi=c(y.lo,y.hi,y.hi,y.lo,y.lo),
                        xw=x.hi-x.lo,
                        yw=y.hi-y.lo)
    cbind(centre,corners)
    })
  }

# since the edge of the pixel is 0.5 away from the middle of the pixel
# Lower pixel edge: pixel centres sit on integers, so the region edge is
# half a pixel below the smallest centre coordinate.
emin<-function(...) {
  min(...) - 0.5
}
# Upper pixel edge: half a pixel beyond the largest centre coordinate.
emax<-function(...) {
  max(...) + 0.5
}
# Horizontal and vertical extent "cross" for every labelled object.
#
# For each object (grouped by val) two rows are produced: one spanning the
# full x extent at the centre, and one spanning the full y extent at the
# centre -- convenient for drawing with geom_errorbar/geom_errorbarh.
extents.fun<-function(in.df) {
  ddply(in.df,.(val), function(c.cell) {
    c.cell.mean<-colMeans.df(c.cell[,c("x","y")])
    # row 1 pins x to the centre and spans y; row 2 spans x at the centre y
    out.df<-cbind(c.cell.mean,data.frame(xmin=c(c.cell.mean$x,emin(c.cell$x)),
                                         xmax=c(c.cell.mean$x,emax(c.cell$x)),
                                         ymin=c(emin(c.cell$y),c.cell.mean$y),
                                         ymax=c(emax(c.cell$y),c.cell.mean$y)))
    })
  }

# Rainbow fill scale pinned to [0, max.val] so separate plots share a
# directly comparable colour mapping.
th_fillmap.fn<-function(max.val) scale_fill_gradientn(colours=rainbow(10),limits=c(0,max.val))

Quantitative Big Imaging

author: Kevin Mader date: 17 April 2014 width: 1440 height: 900 css: ../template.css transition: rotate

ETHZ: 227-0966-00L

Dynamic Experiments

Course Outline

Literature / Useful References

Books


Papers / Sites

Previously on QBI ...

Quantitative "Big" Imaging

The course has covered imaging enough and there have been a few quantitative metrics, but "big" has not really entered.

What does big mean?


So what is "big" imaging

Objectives

  1. What sort of dynamic experiments do we have?
  2. How can we design good dynamic experiments?
  3. How can we track objects between points?
  4. How can we track topology?
  5. How can we track voxels?
  6. How can we assess deformation and strain?
  7. How can we assess more general cases?

Outline


Motivation


We can say what it looks like, but many pieces of quantitative information are difficult to extract.

Scientific Goals

Rheology

Understanding the flow of liquids and mixtures is important for many processes


Deformation

Deformation is similarly important since it plays a significant role in the following scenarios

Experiments

The first step of any of these analyses is proper experimental design, since there are always trade-offs involved.


There are always trade-offs to be made between getting the best possible high-resolution nanoscale dynamics and capturing the system level behavior.

Simulation

In many cases, experimental data is inherited and little can be done about the design, but when there is still the opportunity, simulations provide a powerful tool for tuning and balancing a large number of parameters

Simulations also provide the ability to pair post-processing to the experiments and determine the limits of tracking.

What do we start with?

Going back to our original cell image

  1. We have been able to get rid of the noise in the image and find all the cells (lecture 2-4)
  2. We have analyzed the shape of the cells using the shape tensor (lecture 5)
  3. We even separated cells joined together using Watershed (lecture 6)
  4. We have created even more metrics characterizing the distribution (lecture 7)

We have at least a few samples (or different regions), large number of metrics and an almost as large number of parameters to tune

How do we do something meaningful with it?

Basic Simulation

We start with a starting image

# Fill Image code
# ... is for extra columns in the data set
# Densify a sparse pixel table onto a complete regular grid.
#
# Every (x, y) position of the grid spanned by in.img is visited; positions
# missing from in.img are filled with val=0 plus any extra columns supplied
# via ... .  NOTE(review): one subset() call per grid position makes this
# slow on large images.
fill.img.fn<-function(in.img,step.size=1,...) {
  xr<-range(in.img$x)
  yr<-range(in.img$y)
  ddply(expand.grid(x=seq(xr[1],xr[2],step.size),
                  y=seq(yr[1],yr[2],step.size)),
        .(x,y),
      function(c.pos) {
        ix<-c.pos$x[1]
        iy<-c.pos$y[1]
        nset<-subset(in.img,x==ix & y==iy)
        # background pixel: synthesise a zero-valued row
        if(nrow(nset)<1) nset<-data.frame(x=ix,y=iy,val=0,...)
        nset
        })
}
# Rasterize a list of circles ("spheres") onto a square grid.
#
# sph.list: data frame with columns x, y, r (centre and radius per circle)
# base.gr:  coordinate vector used for both grid axes
# Returns a data frame with x, y, val (how many circles cover the pixel)
# and phase (TRUE where at least one circle covers the pixel).
make.spheres<-function(sph.list,base.gr=seq(-1,1,length.out=40)) {
  start.image<-expand.grid(x=base.gr,y=base.gr)
  start.image$val<-c(0)
  # seq_len handles an empty sphere list safely (1:0 would iterate with NA rows)
  for(i in seq_len(nrow(sph.list))) {
    start.image$val<-with(start.image,
                          val + (
                            ((x-sph.list[i,"x"])^2+(y-sph.list[i,"y"])^2)<
                              sph.list[i,"r"]^2)
      )
  }
  # phase is simply "covered by at least one sphere"
  start.image$phase<-with(start.image,val>0)
  start.image
}
# Random circle centres: n.pts circles of radius r placed uniformly inside
# [min, max] x [min, max].
# BUG FIX: the original ignored its min/max arguments and always sampled
# from [-1, 1]; defaults are unchanged, so existing callers are unaffected.
rand.list<-function(n.pts,r=0.15,min=-1,max=1) {
  data.frame(x=runif(n.pts,min=min,max=max),
             y=runif(n.pts,min=min,max=max),
             r=r)
}
# Regular n.pts x n.pts grid of circle centres spanning [min, max] on both
# axes, with a constant radius column r.
grid.list<-function(n.pts,r=0.15,min=-1,max=1) {
  axis.pts<-seq(min,max,length.out=n.pts)
  cbind(expand.grid(x=axis.pts,y=axis.pts),r=r)
}


Analysis

Describing Motion

\[ \vec{v}(\vec{x})=\langle 0,0.1 \rangle \]

# Overlay the constant flow field v = <0, 0.1> as arrows on the sphere image
ggplot(subset(make.spheres(test.grid),phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  geom_segment(data=cbind(test.grid,xv=0,yv=0.1),
               aes(xend=x+xv,yend=y+yv),arrow=arrow(length = unit(0.3,"cm")))+
  coord_equal()+
  theme_bw(20)
plot of chunk unnamed-chunk-2


\[ \vec{v}(\vec{x})=0.3\frac{\vec{x}}{||\vec{x}||}\times \langle 0,0,1 \rangle \]

# Rotational flow: velocity is perpendicular to the radius vector,
# (xv, yv) = 0.3 * (-y, x) / |r|
test.grid$xyr<-with(test.grid,sqrt(x^2+y^2))
ggplot(subset(make.spheres(test.grid),phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  geom_segment(data=with(test.grid,cbind(test.grid,xv=-0.3*y/xyr,yv=0.3*x/xyr)),
               aes(xend=x+xv,yend=y+yv),arrow=arrow(length = unit(0.3,"cm")))+
  coord_equal()+
  theme_bw(20)
plot of chunk unnamed-chunk-3

Many Frames

\[ \vec{v}(\vec{x})=\langle 0,0.1 \rangle \]

# Build 9 frames of the linear flow by shifting the sphere centres in y
many.frames<-ldply(seq(0,2,length.out=9),function(in.offset) {
  cbind(
    make.spheres(data.frame(x=test.grid$x,y=test.grid$y+in.offset,r=test.grid$r)),
    offset=in.offset
    )
})

# one facet per time step
ggplot(subset(many.frames,phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  coord_equal()+
  facet_wrap(~offset)+
  labs(title="Different Frames in Linear Flow Image")+
  theme_bw(20)
plot of chunk unnamed-chunk-4


\[ \vec{v}(\vec{x})=0.3\frac{\vec{x}}{||\vec{x}||}\times \langle 0,0,1 \rangle \]

# Spiral flow: advance the centres incrementally, each frame stepping the
# positions by 0.05 along the direction perpendicular to the radius
many.frames<-cbind(make.spheres(test.grid),offset=0)
last.frame<-test.grid
for(in.offset in seq(0,2,length.out=9)) {
  last.frame$xyr<-with(last.frame,sqrt(x^2+y^2))
  # guard against division by zero for an object exactly at the origin
  last.frame$xyr[which(last.frame$xyr==0)]<-1
  last.frame<-with(last.frame,data.frame(x=x-0.05*y/xyr,y=y+0.05*x/xyr,r=r))
  
  many.frames<-rbind(many.frames,
                     cbind(make.spheres(last.frame),offset=in.offset)
                     )
}
ggplot(subset(many.frames,phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  coord_equal()+
  facet_wrap(~offset)+
  labs(title="Different Frames in Spiral Flow")+
  theme_bw(20)
plot of chunk unnamed-chunk-5

Random Appearance / Disappearance

Under perfect imaging and experimental conditions objects should not appear and reappear but due to

It is common for objects to appear and vanish regularly in an experiment.


# Linear flow where each frame keeps only a random subset of 18 objects,
# simulating objects appearing and vanishing between frames
many.frames<-ldply(seq(0,1.,length.out=9),function(in.offset) {
  c.grid<-test.grid[sample(nrow(test.grid), 18), ]
  cbind(
    make.spheres(data.frame(x=c.grid$x,y=c.grid$y+in.offset,r=c.grid$r)),
    offset=in.offset
    )
})
ggplot(subset(many.frames,phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  coord_equal()+
  facet_wrap(~offset)+
  labs(title="Different Frames in Linear Flow Image")+
  theme_bw(20)
plot of chunk unnamed-chunk-6

Jitter / Motion Noise

Even perfect spherical objects do not move in a straight line. The jitter can be seen as a stochastic variable with a random magnitude (\(a\)) and angle (\(b\)). This is then sampled at every point in the field

\[ \vec{v}(\vec{x})=\vec{v}_L(\vec{x})+||a||\measuredangle b \]

# Linear flow plus jitter: each arrow adds U(-0.05, 0.05) noise to the
# constant velocity <0, 0.1>
ggplot(subset(make.spheres(test.grid),phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  geom_segment(data=cbind(test.grid,
                          xv=0+runif(nrow(test.grid),min=-0.05,max=0.05),
                          yv=0.1+runif(nrow(test.grid),min=-0.05,max=0.05)),
               aes(xend=x+xv,yend=y+yv),arrow=arrow(length = unit(0.3,"cm")))+
  coord_equal()+
  theme_bw(20)
plot of chunk unnamed-chunk-7

Jitter (Continued)

Over many frames this can change the path significantly

# Accumulate 9 jittered frames: every step moves objects +0.2 in y plus
# uniform noise in both axes.  Both the rendered frames and the raw centre
# positions (with object ids) are kept for the path plot that follows.
last.frame<-test.grid[,c("x","y","r")]
last.frame$id<-1:nrow(last.frame)
many.frames<-cbind(make.spheres(last.frame),offset=0)
many.grids<-cbind(last.frame,offset=0)

for(in.offset in cumsum(rep(0.2,8))) {
  last.frame<-with(last.frame,
                   data.frame(x=x+runif(nrow(last.frame),min=-0.1,max=0.1),
                              y=y+0.2+runif(nrow(last.frame),min=-0.1,max=0.1),
                              r=r)
                   )
  
  # re-attach ids (the data.frame() call above dropped the id column)
  last.frame$id<-1:nrow(last.frame)
  
  many.frames<-rbind(many.frames,
                     cbind(make.spheres(last.frame),offset=in.offset)
                     )
  
  many.grids<-rbind(many.grids,
                    cbind(last.frame,offset=in.offset)
                    )
}
ggplot(subset(many.frames,phase),aes(x,y))+
  geom_raster(fill="gray50",alpha=0.75)+
  coord_equal()+
  facet_wrap(~offset)+
  labs(title="Different Frames in Linear Flow Image")+
  theme_bw(20)
plot of chunk unnamed-chunk-8


The simulation can be represented in a more clear fashion by using single lines to represent each spheroid

# Plot each object's trajectory as a single line, coloured by object id.
# (Removed a stray trailing empty argument from the original aes(x,y,).)
ggplot(many.grids,aes(x,y))+
  geom_path(aes(color=id,group=id))+
  coord_equal()+
  labs(title="Different Paths in Linear Jittered Flow Image")+
  scale_color_gradientn(colours=rainbow(10))+
  theme_bw(20)
plot of chunk unnamed-chunk-9

Limits of Tracking

We see that visually tracking samples can be difficult and there are a number of parameters which affect the ability for us to clearly see the tracking.


We thus try to quantify the limits of these parameters for different tracking methods in order to design experiments better.

Acquisition-based Parameters

Experimental Parameters

Tracking: Nearest Neighbor

While there exist a number of different methods and complicated approaches for tracking, for experimental design it is best to start with the simplest, most easily understood method. The limits of this can be found and components added as needed until it is possible to realize the experiment

We then return to nearest neighbor which means we track a point (\(\vec{P}_0\)) from an image (\(I_0\)) at \(t_0\) to a point (\(\vec{P}_1\)) in image (\(I_1\)) at \(t_1\) by

\[ \vec{P}_1=\textrm{argmin}(||\vec{P}_0-\vec{y}|| \forall \vec{y}\in I_1) \]

Scoring Tracking

In the following examples we will use simple metrics for scoring fits where the objects are matched and the number of misses is counted.

There are a number of more sensitive scoring metrics which can be used, by finding the best submatch for a given particle since the number of matches and particles does not always correspond. See the papers at the beginning for more information

# Load the external tracking helpers (generate.frames, track.frames, ...).
# NOTE(review): absolute Dropbox paths only resolve on the author's machine;
# consider project-relative paths for reproducibility.
source('~/Dropbox/TIPL/src/R/trackingCode.R')
source('~/Dropbox/WorkRelated/TrackTopology/simFlow.R')

Basic Simulations

Input flow from simulation

\[ \vec{v}(\vec{x})=\langle 0,0,0.05 \rangle+||0.01||\measuredangle b \]

# Generate a simple synthetic dataset
# (20 objects, 10 frames, constant z flow of 0.05 with 1% jitter, 2D only)
in.frames<-generate.frames(base.rand=0.01,crop.size=c(-10,10),n.objects=20,n.frames=10,flow.rate=0.05,force.2d=T) 
# track with plain NN plus the fixed- and adaptive-offset variants
all.tracks<-track.frames(in.frames,offset=c(0,0,0.1),run.offset=T,run.adaptive=T,maxVolPenalty=NA)

# ground-truth paths, coloured by the true object number
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z,color=as.factor(REAL_LACUNA_NUMBER)))+
  geom_path(aes(linetype="Original"))+
  labs(x="X Position",y="Z Position",color="Object ID",title="Flow Simulation Results")+
  theme_bw(20)
plot of chunk unnamed-chunk-10


Nearest Neighbor Tracking

# Tracked paths from the plain nearest-neighbour matcher, one facet per method
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z,color=as.factor(REAL_LACUNA_NUMBER)))+
  #geom_path(aes(linetype="Original"))+
  geom_path(data=subset(all.tracks,Matching=="No Offset"),aes(linetype="Tracked"),size=2,alpha=0.5)+
  facet_wrap(~Matching)+
  labs(x="X Position",y="Z Position",color="Object ID",title="Tracking Results")+
  theme_bw(20)
plot of chunk unnamed-chunk-11

More Complicated Flows

Input flow from simulation

\[ \vec{v}(\vec{x})=\langle 0,0,0.01 \rangle+||0.05||\measuredangle b \]

# Generate a simple synthetic dataset
# higher jitter (0.1) and slower flow (0.01); a normal distribution with the
# same midpoint/half-width replaces the default uniform noise
in.frames<-generate.frames(base.rand=0.1,crop.size=c(-10,10),n.objects=20,n.frames=20,flow.rate=0.01,
                           force.2d=T,rand.fun=function(x,min=0,max=1) rnorm(x,(min+max)/2,(max-min)/2)) 
all.tracks<-track.frames(in.frames,offset=c(0,0,0.1),run.offset=T,run.adaptive=T,maxVolPenalty=NA)

# ground-truth paths for the noisier flow
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z,color=as.factor(REAL_LACUNA_NUMBER)))+
  geom_path(aes(linetype="Original"))+
  labs(x="X Position",y="Z Position",color="Object ID",title="Flow Simulation Results")+
  theme_bw(20)
plot of chunk unnamed-chunk-12


Nearest Neighbor Tracking

# Overlay the tracked paths on the ground truth for each matching strategy
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z,color=as.factor(REAL_LACUNA_NUMBER)))+
  geom_path(aes(linetype="Original"))+
  geom_path(data=subset(all.tracks,Matching=="No Offset"),aes(linetype="Tracked"),size=2,alpha=0.5)+
  facet_wrap(~Matching)+
  labs(x="X Position",y="Z Position",color="Object ID",title="Tracking Results")+
  theme_bw(20)
plot of chunk unnamed-chunk-13

Quantifying Tracking Rate

We can then quantify the success rate of each algorithm on the data set using the very simple match and mismatch metrics

# NOTE(review): the first assignment is immediately overwritten, so all
# matching strategies are shown rather than just "No Offset"
c.tracks<-subset(all.tracks,Matching=="No Offset")
c.tracks<-all.tracks
# a nonzero change in the true object number marks a mismatched link
c.subtrack<-subset(c.tracks,abs(D_REAL_LACUNA_NUMBER)>0)
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z))+
  geom_path(aes(linetype="Original",color=as.factor(REAL_LACUNA_NUMBER)))+
  geom_path(data=c.tracks,aes(linetype="Tracked",color=as.factor(REAL_LACUNA_NUMBER)),size=2,alpha=0.5)+
  geom_point(data=c.subtrack,aes(shape="Missed"),size=5,alpha=0.5,color="red")+
  facet_wrap(~Matching)+
  labs(x="X Position",y="Z Position",color="Object ID",title="Tracking Results",fill="Misses")+
  theme_bw(20)
plot of chunk unnamed-chunk-14


Counting Misses

# Hexbin density of the mismatched links (same dead first assignment as above)
c.tracks<-subset(all.tracks,Matching=="No Offset")
c.tracks<-all.tracks
c.subtrack<-subset(c.tracks,abs(D_REAL_LACUNA_NUMBER)>0)
ggplot(do.call(rbind,in.frames),
       aes(x=POS_X,y=POS_Z))+
  stat_binhex(data=c.subtrack,bins=5,drop=F,alpha=1)+
  scale_fill_gradient2(low="white",high="red")+
  facet_wrap(~Matching)+
  labs(x="X Position",y="Z Position",color="Object ID",title="Tracking Results",fill="Misses")+
  theme_bw(20)
plot of chunk unnamed-chunk-15

# format a mismatch fraction as a percentage string (1 - mean, 0.1% steps)
pout<-function(x) paste(round(1000*(1-mean(x)))/10,"%",sep="")
# tabulate the bijective-match failure rate per time bin for each matcher;
# ddply.cutcols turns the cut_interval bins back into numeric midpoints
match.table<-ddply.cutcols(all.tracks,.(cut_interval(sample,10)),function(all.sample) { 
    data.frame(
               BIJ_MATCHA=with(subset(all.sample,Matching=="No Offset"),pout(BIJ_MATCH)),
               BIJ_MATCHB=with(subset(all.sample,Matching=="Fix Offset"),pout(BIJ_MATCH)),
               BIJ_MATCHC=with(subset(all.sample,Matching=="Adaptive Offset"),pout(BIJ_MATCH))
               )
    })

names(match.table)<-c("Time","NN","ONN","ANN")
kable(match.table)
Time NN ONN ANN
1.9 22.5% 22.5% 25%
3.7 22.5% 25% 25%
5.5 32.5% 35% 32.5%
7.3 32.5% 32.5% 27.5%
9.1 20% 30% 22.5%
10.9 20% 20% 25%
12.7 12.5% 17.5% 17.5%
14.5 17.5% 25% 20%
16.3 15% 17.5% 12.5%
18.1 22.5% 27.5% 25%

Jitter Sensitivity

# per-strategy counts of correctly matched vs missed object links
match.qual<-function(in.tracks) ddply(in.tracks,.(Matching),function(c.subset) {
  data.frame(Obj.Matched=sum(c.subset$D_REAL_LACUNA_NUMBER==0),
              Obj.Missed=sum(abs(c.subset$D_REAL_LACUNA_NUMBER)>0))
})
# normal noise with the same midpoint/half-width as runif(minv, maxv)
rand.fun.norm<-function(n,minv,maxv) rnorm(n,(maxv+minv)/2,(maxv-minv)/2)
n.iters<-20
z.vel<-0.1
n.frames<-2


# simulate one (jitter, object count) setting and score all three matchers;
# mean_obj_spacing expresses typical inter-object distance in velocity units
jd.gen.fun<-function(c.jitter,c.obj.count,...) {
  test.data<-generate.frames(base.rand=c.jitter*z.vel,crop.size=c(-10,10),n.objects=c.obj.count,n.frames=n.frames,rand.fun=rand.fun.norm,...) 
  test.tracks<-track.frames(test.data,offset=c(0,0,z.vel),run.offset=T,run.adaptive=T,maxVolPenalty=NA)
  cbind(jitter=c.jitter,obj.count=c.obj.count,
        mean_obj_spacing=((1/c.obj.count)^0.33)/z.vel,
        match.qual(test.tracks))
}
# 15 jitter levels, each repeated n.iters times
# NOTE(review): the dangling "jitter.full<-" below chains onto the next
# assignment, so jitter.full and jitter.summary.3d receive the same value
jitter.vals<-rep(seq(0,2.5,length.out=15),n.iters)
jitter.full<-
jitter.summary.3d<-ddply(ldply(jitter.vals,function(c.jitter) jd.gen.fun(c.jitter,20),.parallel=T),
                      .(jitter,Matching),
                      function(c.subset) {
  data.frame(Matched=100*sum(c.subset$Obj.Matched)/(sum(c.subset$Obj.Missed)+sum(c.subset$Obj.Matched)))
})
# same sweep restricted to 2D motion
jitter.summary.2d<-ddply(ldply(jitter.vals,function(c.jitter) jd.gen.fun(c.jitter,20,force.2d=T),.parallel=T),
                      .(jitter,Matching),
                      function(c.subset) {
  data.frame(Matched=100*sum(c.subset$Obj.Matched)/(sum(c.subset$Obj.Missed)+sum(c.subset$Obj.Matched)))
})
# match rate vs jitter with 2D and 3D overlaid, one facet per matcher
 ggplot(rbind(cbind(jitter.summary.3d,geom="3D"),cbind(jitter.summary.2d,geom="2D")),
        aes(x=100*jitter,y=Matched,color=geom))+
  geom_line()+geom_point()+facet_wrap(~Matching)+
  theme_bw(24)+labs(x="Position Jitter (% of Velocity)",y="% of Bubbles Matched",color="Matching Type")
plot of chunk unnamed-chunk-17

Density and Jitter

n.iters<-20
registerDoMC(8) # divide the jobs better

jitter.vals<-seq(0,2,length.out=4)
# sequence evenly spaced in inverse cube-root space, so the resulting
# object counts give evenly spread mean spacings
irseq<-function(a,b,length.out) {1/seq(1/b^(1/3),1/a^(1/3),length.out=length.out)^3} # seq for inverted numbers
obj.count<-irseq(25,2500,length.out=3)

# all (object count, jitter) combinations, replicated n.iters times
jit.bub<-merge(obj.count,jitter.vals)
jd.vals<-mapply(list,rep(jit.bub[,1],n.iters),rep(jit.bub[,2],n.iters),SIMPLIFY=F)

# run the simulations in parallel; c.in is list(obj.count, jitter)
jd.full<-ldply(jd.vals,.parallel=T,
               function(c.in) jd.gen.fun(c.in[[2]],c.in[[1]]))
jd.summary<-ddply(jd.full,.(jitter,mean_obj_spacing,Matching),function(c.subset) {
  data.frame(obj.count=c.subset$obj.count[1],
             Matched=100*sum(c.subset$Obj.Matched)/(sum(c.subset$Obj.Missed)+sum(c.subset$Obj.Matched)),
             obj.matched=sum(c.subset$Obj.Matched),
             obj.found=100*with(c.subset,sum(Obj.Matched)/(n.iters*(n.frames-1)*obj.count[1])))
})

# match rate vs jitter, coloured by mean object spacing
 ggplot(jd.summary,aes(x=100*jitter,y=Matched,color=as.factor(round(100*mean_obj_spacing))))+
  geom_line()+geom_point()+facet_wrap(~Matching)+
  theme_bw(24)+labs(x="Position Jitter (% of Velocity)",y="% of Obj Matched",color="Obj.Spacing\n(% of Velocity)")
plot of chunk unnamed-chunk-18


# heat-map view: match rate as a tile over jitter x object spacing
ggplot(jd.summary,aes(x=100*jitter,y=100*mean_obj_spacing,fill=Matched))+
  geom_tile()+facet_wrap(~Matching)+
  labs(x="Position Jitter (% of Velocity)",fill="% of Obj Matched",y="Obj.Spacing (% of Velocity)")+
  theme_bw(24)
plot of chunk unnamed-chunk-19

Specifications

# render the full density/jitter summary as a markdown table
kable(jd.summary)
jitter mean_obj_spacing Matching obj.count Matched obj.matched obj.found
0.0000 0.7563 No Offset 2500.0 2.156 1078 2.156
0.0000 0.7563 Fix Offset 2500.0 100.000 50000 100.000
0.0000 0.7563 Adaptive Offset 2500.0 100.000 50000 100.000
0.0000 2.1113 No Offset 111.4 41.622 924 41.478
0.0000 2.1113 Fix Offset 111.4 100.000 2220 99.655
0.0000 2.1113 Adaptive Offset 111.4 100.000 2220 99.655
0.0000 3.4568 No Offset 25.0 80.600 403 80.600
0.0000 3.4568 Fix Offset 25.0 100.000 500 100.000
0.0000 3.4568 Adaptive Offset 25.0 100.000 500 100.000
0.6667 0.7563 No Offset 2500.0 3.922 1961 3.922
0.6667 0.7563 Fix Offset 2500.0 8.122 4061 8.122
0.6667 0.7563 Adaptive Offset 2500.0 8.132 4066 8.132
0.6667 2.1113 No Offset 111.4 37.703 837 37.572
0.6667 2.1113 Fix Offset 111.4 54.144 1202 53.957
0.6667 2.1113 Adaptive Offset 111.4 53.919 1197 53.733
0.6667 3.4568 No Offset 25.0 69.000 345 69.000
0.6667 3.4568 Fix Offset 25.0 81.600 408 81.600
0.6667 3.4568 Adaptive Offset 25.0 80.000 400 80.000
1.3333 0.7563 No Offset 2500.0 2.326 1163 2.326
1.3333 0.7563 Fix Offset 2500.0 2.580 1290 2.580
1.3333 0.7563 Adaptive Offset 2500.0 2.574 1287 2.574
1.3333 2.1113 No Offset 111.4 22.748 505 22.669
1.3333 2.1113 Fix Offset 111.4 25.541 567 25.452
1.3333 2.1113 Adaptive Offset 111.4 25.586 568 25.497
1.3333 3.4568 No Offset 25.0 51.600 258 51.600
1.3333 3.4568 Fix Offset 25.0 57.600 288 57.600
1.3333 3.4568 Adaptive Offset 25.0 57.400 287 57.400
2.0000 0.7563 No Offset 2500.0 1.010 505 1.010
2.0000 0.7563 Fix Offset 2500.0 1.168 584 1.168
2.0000 0.7563 Adaptive Offset 2500.0 1.166 583 1.166
2.0000 2.1113 No Offset 111.4 12.928 287 12.883
2.0000 2.1113 Fix Offset 111.4 13.514 300 13.467
2.0000 2.1113 Adaptive Offset 111.4 13.378 297 13.332
2.0000 3.4568 No Offset 25.0 35.000 175 35.000
2.0000 3.4568 Fix Offset 25.0 34.200 171 34.200
2.0000 3.4568 Adaptive Offset 25.0 33.400 167 33.400

Designing Experiments

How does this help us to design experiments?


How much is enough?

Extending Nearest Neighbor

Bijective Requirement

\[ \vec{P}_f=\textrm{argmin}(||\vec{P}_0-\vec{y} || \forall \vec{y}\in I_1) \] \[ \vec{P}_i=\textrm{argmin}(||\vec{P}_f-\vec{y} || \forall \vec{y}\in I_0) \]

\[ \vec{P}_i \stackrel{?}{=} \vec{P}_0 \]

Maximum Displacement

\[ \vec{P}_1=\begin{cases} ||\vec{P}_0-\vec{y} ||<\textrm{MAXD}, & \textrm{argmin}(||\vec{P}_0-\vec{y} || \forall \vec{y}\in I_1) \\ \textrm{Otherwise}, & \emptyset \end{cases}\]


Prior / Expected Movement

\[ \vec{P}_1=\textrm{argmin}(||\vec{P}_0+\vec{v}_{offset}-\vec{y} || \forall \vec{y}\in I_1) \]

Adaptive Movement

Can then be calculated in an iterative fashion where the offset is the average from all of the \(\vec{P}_1-\vec{P}_0\) vectors. It can also be performed \[ \vec{P}_1=\textrm{argmin}(||\vec{P}_0+\vec{v}_{offset}-\vec{y} || \forall \vec{y}\in I_1) \]

Beyond Nearest Neighbor

While nearest neighbor provides a useful starting tool it is not sufficient for truly complicated flows and datasets.

Better Approaches

  1. Multiple Hypothesis Testing Nearest neighbor just compares the points between two frames and there is much more information available in most time-resolved datasets. This approach allows for multiple possible paths to be explored at the same time and the best chosen only after all frames have been examined

Shortcomings

  1. Merging and Splitting Particles The simplicity of the nearest neighbor model does not really allow for particles to merge and split (relaxing the bijective requirement allows such behavior, but the method is still not suited for such tracking). For such systems a more specific, physically-based model is required to encapsulate this behavior.

Voxel-based Approaches

For voxel-based approaches the most common analyses are digital image correlation (or for 3D images digital volume correlation), where the correlation is calculated between two images or volumes.

Standard Image Correlation

Given images \(I_0(\vec{x})\) and \(I_1(\vec{x})\) at time \(t_0\) and \(t_1\) respectively. The correlation between these two images can be calculated

\[ C_{I_0,I_1}(\vec{r})=\langle I_0(\vec{x}) I_1(\vec{x}+\vec{r}) \rangle \]


# so everything is on an integer lattice (interpolation makes everything messier)
# Remap x/y coordinates onto a dense integer lattice (1..n per axis),
# carrying every other column along unchanged.  Factor conversion assigns
# consecutive integers to the sorted unique coordinate values.
fix.grid<-function(in.grid) {
  keep.cols<-!(names(in.grid) %in% c("x","y"))
  lattice.grid<-in.grid[,keep.cols]
  lattice.grid$x<-as.numeric(as.factor(in.grid$x))
  lattice.grid$y<-as.numeric(as.factor(in.grid$y))
  lattice.grid
}
# 5x5 regular grid of spheres, rendered at t=0 and shifted +0.1 in y for t=1
start.grid<-grid.list(5)
start.img<-fix.grid(make.spheres(start.grid,base.gr=seq(-1,1,length.out=40)))
final.grid<-data.frame(x=with(start.grid,x),y=with(start.grid,y+0.1),r=start.grid$r)
final.img<-fix.grid(make.spheres(final.grid,base.gr=seq(-1,1,length.out=40)))
# overlay the two time points with transparency
ggplot()+
  geom_raster(data=subset(start.img,phase),aes(x,y,fill="0"),alpha=0.75)+
  geom_raster(data=subset(final.img,phase),aes(x,y,fill="1"),alpha=0.75)+
  coord_equal()+
  labs(fill="time")+
  theme_bw(20)
plot of chunk unnamed-chunk-21

#' Calculate the cross correlation between two binary images
#' @author Kevin Mader (kevin.mader@gmail.com)
#' Applies the coordinate transform (tr.x, tr.y) to img.b, clips it to
#' img.a's field of view, and scores the overlap of the in-phase pixels,
#' normalized by the number of overlapping window pixels.
#'
#' @param img.a is the starting or I_0 image
#' @param img.b is the destination or I_1 image
#' @param tr.x is the function transforming the x coordinate
#' @param tr.y is the function transforming the y coordinate
#' @return one-row data.frame with e.val (normalized correlation),
#'   count (number of matched cells) and size (normalization window size)
cc.imfun<-function(img.a,img.b,tr.x=function(x,y) x,tr.y=function(x,y) y) {
  # get the positions in image a
  x.vals<-unique(img.a$x)
  y.vals<-unique(img.a$y)
  
  # transform image b
  tr.img.b<-img.b
  # round is used to put everything back on an integer lattice
  tr.img.b$x<-round(with(img.b,tr.x(x,y)))
  tr.img.b$y<-round(with(img.b,tr.y(x,y)))
  # count the overlapping pixels in the window to normalize
  tr.img.b<-subset(tr.img.b,
                   ((x %in% x.vals) & (y %in% y.vals))
                   )
  norm.f<-nrow(tr.img.b)
  if(norm.f<1) norm.f<-1
  
  # keep only the in phase objects
  tr.img.a<-subset(img.a,phase)
  tr.img.b<-subset(tr.img.b,phase)
  
  if (nrow(tr.img.a)>0 & nrow(tr.img.b)>0) {
    # a cell counts as a match when both images contribute a pixel there
    matches<-ddply(rbind(cbind(tr.img.a,label="A"),cbind(tr.img.b,label="B")),.(x,y),
                   function(c.pos) {
                     if(nrow(c.pos)>1) data.frame(e.val=1)
                     else data.frame(e.val=c())  # empty frame: drop this cell
                     })
    data.frame(e.val=sum(matches$e.val)/norm.f,count=nrow(matches),size=norm.f)
    } else {
      data.frame(e.val=0,count=0,size=norm.f)
    }
  
  }

# exhaustive correlation search over integer shifts (u, v) in [-8, 8]
cc.points<-expand.grid(vx=seq(-8,8,1),vy=seq(-8,8,1))
cc.img<-ddply(cc.points,.(vx,vy),function(c.pt) {
  tr.x<-function(x,y) (x+c.pt[1,"vx"])
  tr.y<-function(x,y) (y+c.pt[1,"vy"])
  cc.imfun(start.img,final.img,tr.x=tr.x,tr.y=tr.y)
},.parallel=T)

# correlation surface with density contours over the positive region
ggplot(cc.img,aes(vx,vy,fill=e.val))+
  geom_raster()+geom_density2d(data=subset(cc.img,e.val>0),aes(weight=e.val))+
  labs(x="u",y="v",fill="Correlation",title="Correlation vs R")+
  scale_fill_gradient2(high="red")+
  theme_bw(25)
plot of chunk unnamed-chunk-23

Random Image Positions

With highly structured / periodic samples, identifying a best correlation is difficult since there are multiple maxima.

# randomly placed spheres, shifted +0.2 in y for the second time point
start.grid<-rand.list(12)
start.img<-fix.grid(make.spheres(start.grid,base.gr=seq(-1,1,length.out=30)))

final.grid<-data.frame(x=with(start.grid,x),y=with(start.grid,y+0.2),r=start.grid$r)
final.img<-fix.grid(make.spheres(final.grid,base.gr=seq(-1,1,length.out=30)))

ggplot()+
  geom_raster(data=subset(start.img,phase),aes(x,y,fill="0"),alpha=0.75)+
  geom_raster(data=subset(final.img,phase),aes(x,y,fill="1"),alpha=0.75)+
  coord_equal()+
  labs(fill="time")+
  theme_bw(20)
plot of chunk unnamed-chunk-24


# repeat the shift search for the random image pair; weighting the contours
# by e.val^2 sharpens them around the true displacement
cc.points<-expand.grid(vx=seq(-8,8,1),vy=seq(-8,8,1))
cc.img<-ddply(cc.points,.(vx,vy),function(c.pt) {
  tr.x<-function(x,y) (x+c.pt[1,"vx"])
  tr.y<-function(x,y) (y+c.pt[1,"vy"])
  cc.imfun(start.img,final.img,tr.x=tr.x,tr.y=tr.y)
},.parallel=T)

ggplot(cc.img,aes(vx,vy,fill=e.val))+
  geom_raster()+geom_density2d(data=subset(cc.img,e.val>0),aes(weight=e.val^2))+
  labs(x="u",y="v",fill="Correlation",title="Correlation vs r")+
  scale_fill_gradient2(high="red")+
  theme_bw(25)
plot of chunk unnamed-chunk-26

Extending Correlation

The correlation function can be extended by adding rotation and scaling terms to the offset making the tool more flexible but also more computationally expensive for large search spaces.

\[ C_{I_0,I_1}(\vec{r},s,\theta)= \] \[ \langle I_0(\vec{x}) I_1( \begin{bmatrix} s\cos\theta & -s\sin\theta\\ s\sin\theta & s\cos\theta \end{bmatrix} \vec{x}+\vec{r}) \rangle \]


# second frame: rotate the sphere centres by -45 degrees and scale by 0.9
start.grid<-rand.list(15)
rot.th<--pi/4
start.img<-fix.grid(make.spheres(start.grid,base.gr=seq(-1,1,length.out=60)))
final.grid<-data.frame(x=0.9*with(start.grid,cos(rot.th)*x-sin(rot.th)*y),
                       y=0.9*with(start.grid,cos(rot.th)*y+sin(rot.th)*x),r=start.grid$r)
final.img<-fix.grid(make.spheres(final.grid,base.gr=seq(-1,1,length.out=60)))
ggplot()+
  geom_raster(data=subset(start.img,phase),aes(x,y,fill="0"),alpha=0.75)+
  geom_raster(data=subset(final.img,phase),aes(x,y,fill="1"),alpha=0.75)+
  coord_equal()+
  labs(fill="time")+
  theme_bw(20)
plot of chunk unnamed-chunk-27

# Search over rotation angle theta and scale factor a (no translation).
cc.points<-expand.grid(theta=seq(0,pi/2,length.out=8),a=seq(0.7,1,length.out=5))
# Centre of rotation in pixel coordinates.
mid.pt<-c(20,20)
cc.img<-ddply(cc.points,.(theta,a),function(c.pt) {
  # rotate-and-scale about mid.pt: shift to the origin, apply the
  # rotation matrix scaled by a, then shift back
  tr.x<-function(x,y) 
    (c.pt[1,"a"]*(cos(c.pt[1,"theta"])*(x-mid.pt[1])-sin(c.pt[1,"theta"])*(y-mid.pt[2]))+mid.pt[1])
  tr.y<-function(x,y) 
    # BUG FIX: shift back by the y-centre mid.pt[2]; the original added
    # mid.pt[1], which was masked only because mid.pt is symmetric (20,20)
    (c.pt[1,"a"]*(sin(c.pt[1,"theta"])*(x-mid.pt[1])+cos(c.pt[1,"theta"])*(y-mid.pt[2]))+mid.pt[2])
  cc.imfun(start.img,final.img,tr.x=tr.x,tr.y=tr.y)
},.parallel=T)

# Correlation as a function of rotation (degrees) and scale (percent).
ggplot(cc.img, aes(theta * 180 / pi, a * 100, fill = e.val)) +
  geom_raster() +
  geom_density2d(data = subset(cc.img, e.val > 0),
                 mapping = aes(weight = e.val)) +
  scale_fill_gradient2(high = "red") +
  labs(x = "Rotation (deg)", y = "Scale (%)", fill = "Correlation", title = "Correlation vs R") +
  theme_bw(25)
plot of chunk unnamed-chunk-29

More Complicated Changes

# Nonlinear warp demo: each centre coordinate is mapped through
# v -> sign(v) * |v|^1.5, which stretches points away from the origin.
start.grid <- rand.list(20)
start.img <- fix.grid(make.spheres(start.grid, base.gr = seq(-1, 1, length.out = 30)))

final.grid <- data.frame(
  x = sign(start.grid$x) * abs(start.grid$x)^1.5,
  y = sign(start.grid$y) * abs(start.grid$y)^1.5,
  r = start.grid$r
)
final.img <- fix.grid(make.spheres(final.grid, base.gr = seq(-1, 1, length.out = 30)))

# Overlay the two time points for visual comparison.
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-30


# Search over pure translations even though the true deformation is a
# nonlinear warp -- a single global shift cannot describe it well.
cc.points <- expand.grid(vx = seq(-6, 6, 1), vy = seq(-6, 6, 1))
cc.img <- ddply(cc.points, .(vx, vy), function(c.pt) {
  dx <- c.pt[1, "vx"]
  dy <- c.pt[1, "vy"]
  cc.imfun(start.img, final.img,
           tr.x = function(x, y) x + dx,
           tr.y = function(x, y) y + dy)
}, .parallel = TRUE)

# Only shifts with at least one overlapping pixel (count > 0) are shown.
ggplot(subset(cc.img, count > 0), aes(vx, vy, fill = e.val)) +
  geom_raster() +
  geom_density2d(data = subset(cc.img, e.val > 0),
                 mapping = aes(weight = e.val^2)) +
  scale_fill_gradient2(high = "red") +
  labs(x = "u", y = "v", fill = "Correlation", title = "Correlation vs r") +
  theme_bw(25)
plot of chunk unnamed-chunk-32

Subdividing the data

We can approach the problem by subdividing the data into smaller blocks and then apply the digital volume correlation independently to each block.

Choosing a window size

#' Split an image data frame into nx-by-ny rectangular blocks and apply a
#' function to each block.
#'
#' @param in.img image as a data frame with x and y coordinate columns
#' @param nx number of equal-width blocks along x
#' @param ny number of equal-width blocks along y
#' @param block.fn function applied to each block's rows (default: identity)
#' @param ... extra arguments forwarded to ddply (e.g. .parallel)
#' @return block.fn output per block with x.block / y.block interval labels,
#'   a combined label.block string, and numeric block centres
#'   (x.center, y.center) parsed from the cut_interval labels
blockify.img<-function(in.img,nx=5,ny=5,block.fn=function(c.block) c.block,...) {
  # group rows by equal-width intervals of x and y;
  # ddply places the two grouping columns first, so renaming [1:2] is safe
  out.img<-ddply(in.img,.(cut_interval(x,nx),cut_interval(y,ny)),block.fn,...)
  names(out.img)[c(1:2)]<-c("x.block","y.block")
  out.img$label.block<-with(out.img,paste(x.block,y.block,sep=","))
  # turn a cut label like "[a,b]" or "(a,b]" into its midpoint (a+b)/2
  cutlabel.fixer<-function(oVal) {
    sapply(oVal,function(x) {
      cnv<-as.character(x)
      mean(as.numeric(strsplit(substr(cnv,2,nchar(cnv)-1),",")[[1]]))
      })
    }
  out.img$x.center<-cutlabel.fixer(out.img$x.block)
  out.img$y.center<-cutlabel.fixer(out.img$y.block)
  out.img
}

# Cut both time points into a 4x4 grid of blocks and stack them with a
# time column so one faceted plot shows every block at both times.
start.img.blocks <- blockify.img(start.img, 4, 4)
final.img.blocks <- blockify.img(final.img, 4, 4)

comb.img.blocks <- rbind(
  cbind(start.img.blocks, time = 0),
  cbind(final.img.blocks, time = 1)
)

# One facet per block; free scales because blocks cover different regions.
ggplot(subset(comb.img.blocks, phase),
       aes(x, y, fill = as.factor(time))) +
  geom_raster(alpha = 0.75) +
  facet_grid(y.block ~ x.block, scales = "free", as.table = FALSE) +
  theme_bw(10)
plot of chunk unnamed-chunk-33


#' Calculate the cross correlation between an image block and a template
#' @author Kevin Mader (kevin.mader@gmail.com)
#' Scans a window of integer displacements (+/- nsize in steps of nstep)
#' and evaluates cc.imfun at every candidate shift, returning one row of
#' correlation results per (vx, vy) pair.
#'
#' @param bulk.img the image block passed from the blockify command
#' @param s.img the template image to compare against (since we want the whole image not just a region)
#' @param nsize the half-width of the displacement search window (pixels)
#' @param nstep the step between tested displacements (pixels)
block.corr.fun<-function(bulk.img,s.img,nsize=6,nstep=2) {
    # all candidate (vx, vy) integer shifts in the search window
    cc.points<-expand.grid(vx=seq(-nsize,nsize,nstep),vy=seq(-nsize,nsize,nstep))
    ddply(cc.points,.(vx,vy),function(c.pt) {
        # pure-translation coordinate transforms for this candidate shift
        tr.x<-function(x,y) (x+c.pt[1,"vx"])
        tr.y<-function(x,y) (y+c.pt[1,"vy"])
        cc.imfun(s.img,bulk.img,tr.x=tr.x,tr.y=tr.y)
    })
}
# Factory: returns a block function for blockify.img that correlates each
# block against the fixed template image s.img with the given search window.
w.bcf <- function(s.img, nsize = 7, nstep = 2) {
  function(cur.block) {
    block.corr.fun(cur.block, s.img, nsize = nsize, nstep = nstep)
  }
}
# For every block centre keep the row(s) achieving the maximal correlation,
# restricted to shifts that overlapped at least one pixel (count > 0).
max.corr.val <- function(in.blocks) {
  ddply(in.blocks, .(x.center, y.center), function(blk) {
    subset(blk, e.val >= max(blk$e.val) & count > 0)
  })
}

# Run the block-wise correlation (4x4 blocks, default search window) and
# show the correlation map for each block as its own facet.
corr.img.blocks <- blockify.img(final.img, 4, 4, block.fn = w.bcf(start.img), .parallel = TRUE)
ggplot(corr.img.blocks, aes(vx, vy, fill = e.val)) +
  geom_raster() +
  geom_density2d(data = subset(corr.img.blocks, e.val > 0),
                 mapping = aes(weight = e.val^2)) +
  facet_grid(y.block ~ x.block, as.table = FALSE) +
  coord_equal() +
  scale_fill_gradient2(high = "red") +
  labs(fill = "Correlation") +
  theme_bw(10)
plot of chunk unnamed-chunk-34

Overlap

# Pick the best shift per block and draw it as an arrow from the block
# centre on top of the two overlaid time points.
max.block.vals <- max.corr.val(corr.img.blocks)
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  geom_point(data = max.block.vals,
             mapping = aes(x.center, y.center), color = "green", size = 2) +
  geom_segment(data = max.block.vals,
               mapping = aes(x.center, y.center,
                             xend = x.center + vx, yend = y.center + vy),
               arrow = arrow(length = unit(0.3, "cm"))) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-35

Too large of a window

# Coarse 2x2 blocking: windows are too large, so each block mixes several
# independently-moving objects.
start.img.blocks <- blockify.img(start.img, 2, 2)
final.img.blocks <- blockify.img(final.img, 2, 2)
ggplot() +
  geom_raster(data = subset(start.img.blocks, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img.blocks, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  facet_grid(y.block ~ x.block, scales = "free", as.table = FALSE) +
  labs(fill = "time") +
  theme_bw(10)
plot of chunk unnamed-chunk-36


# Block-wise correlation with the too-large 2x2 blocking.
corr.img.blocks <- blockify.img(final.img, 2, 2, block.fn = w.bcf(start.img), .parallel = TRUE)
ggplot(corr.img.blocks, aes(vx, vy, fill = e.val)) +
  geom_raster() +
  geom_density2d(data = subset(corr.img.blocks, e.val > 0),
                 mapping = aes(weight = e.val^2)) +
  facet_grid(y.block ~ x.block, as.table = FALSE) +
  coord_equal() +
  scale_fill_gradient2(high = "red") +
  labs(fill = "Correlation") +
  theme_bw(10)
plot of chunk unnamed-chunk-37

Overlap

# Best shift per (large) block, drawn as arrows over the two time points.
max.block.vals <- max.corr.val(corr.img.blocks)
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  geom_point(data = max.block.vals,
             mapping = aes(x.center, y.center), color = "green", size = 2) +
  geom_segment(data = max.block.vals,
               mapping = aes(x.center, y.center,
                             xend = x.center + vx, yend = y.center + vy),
               arrow = arrow(length = unit(0.3, "cm"))) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-38

Too small of a window

# Fine 8x8 blocking: windows are too small, so many blocks contain little
# or no texture to correlate against.
start.img.blocks <- blockify.img(start.img, 8, 8)
final.img.blocks <- blockify.img(final.img, 8, 8)

ggplot() +
  geom_raster(data = subset(start.img.blocks, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img.blocks, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  facet_grid(y.block ~ x.block, scales = "free", as.table = FALSE) +
  labs(fill = "time") +
  theme_bw(10)
plot of chunk unnamed-chunk-39


# Block-wise correlation with 8x8 blocking and a reduced search half-width
# of 4 pixels (small blocks cannot support a large search window).
corr.img.blocks <- blockify.img(final.img, 8, 8, block.fn = w.bcf(start.img, 4), .parallel = TRUE)
ggplot(corr.img.blocks, aes(vx, vy, fill = e.val)) +
  geom_raster() +
  geom_density2d(data = subset(corr.img.blocks, e.val > 0),
                 mapping = aes(weight = e.val^2)) +
  facet_grid(y.block ~ x.block, as.table = FALSE) +
  coord_equal() +
  scale_fill_gradient2(high = "red") +
  labs(fill = "Correlation") +
  theme_bw(10)
plot of chunk unnamed-chunk-40

Overlap

# Best shift per (small) block, drawn as arrows over the two time points.
max.block.vals <- max.corr.val(corr.img.blocks)
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  geom_point(data = max.block.vals,
             mapping = aes(x.center, y.center), color = "green", size = 2) +
  geom_segment(data = max.block.vals,
               mapping = aes(x.center, y.center,
                             xend = x.center + vx, yend = y.center + vy),
               arrow = arrow(length = unit(0.3, "cm"))) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-41

Shearing

# Shearing demo: each centre is displaced in x by one fifth of its y
# coordinate (simple shear), y and radius unchanged.
start.grid <- rand.list(25)
start.img <- fix.grid(make.spheres(start.grid, base.gr = seq(-1, 1, length.out = 30)))

final.grid <- data.frame(x = start.grid$x + start.grid$y / 5,
                         y = start.grid$y,
                         r = start.grid$r)
final.img <- fix.grid(make.spheres(final.grid, base.gr = seq(-1, 1, length.out = 30)))

# Overlay the two time points for visual comparison.
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-42


# Block-wise correlation of the sheared scene (5x5 blocks, half-width 4,
# step 2) followed by the arrow overlay of the best shift per block.
corr.img.blocks <- blockify.img(final.img, 5, 5, block.fn = w.bcf(start.img, 4, 2), .parallel = TRUE)
max.block.vals <- max.corr.val(corr.img.blocks)
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  geom_point(data = max.block.vals,
             mapping = aes(x.center, y.center), color = "green", size = 2) +
  geom_segment(data = max.block.vals,
               mapping = aes(x.center, y.center,
                             xend = x.center + vx, yend = y.center + vy),
               arrow = arrow(length = unit(0.3, "cm"))) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-43

Compression

# Compression demo: sphere centres stay put, but every radius is halved
# between the two time points.
start.grid <- rand.list(40)
start.img <- fix.grid(make.spheres(start.grid, base.gr = seq(-1, 1, length.out = 40)))

final.grid <- data.frame(x = start.grid$x,
                         y = start.grid$y,
                         r = start.grid$r / 2)
final.img <- fix.grid(make.spheres(final.grid, base.gr = seq(-1, 1, length.out = 40)))

# Overlay the two time points for visual comparison.
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-44


# Block-wise correlation of the compressed scene (10x10 blocks,
# half-width 4) followed by the arrow overlay of the best shift per block.
corr.img.blocks <- blockify.img(final.img, 10, 10, block.fn = w.bcf(start.img, 4), .parallel = TRUE)
max.block.vals <- max.corr.val(corr.img.blocks)
ggplot() +
  geom_raster(data = subset(start.img, phase),
              mapping = aes(x, y, fill = "0"), alpha = 0.75) +
  geom_raster(data = subset(final.img, phase),
              mapping = aes(x, y, fill = "1"), alpha = 0.75) +
  geom_point(data = max.block.vals,
             mapping = aes(x.center, y.center), color = "green", size = 2) +
  geom_segment(data = max.block.vals,
               mapping = aes(x.center, y.center,
                             xend = x.center + vx, yend = y.center + vy),
               arrow = arrow(length = unit(0.3, "cm"))) +
  labs(fill = "time") +
  coord_equal() +
  theme_bw(20)
plot of chunk unnamed-chunk-45

Distribution Metrics

As we covered before distribution metrics like the distribution tensor can be used for tracking changes inside a sample. Of these the most relevant is the texture tensor from cellular materials and liquid foam. The texture tensor is the same as the distribution tensor except that the edges (or faces) represent physically connected / touching objects rather than touching Voronoi faces (or conversely Delaunay triangles).

These metrics can also be used for tracking the behavior of a system without tracking the single points since most deformations of a system also deform the distribution tensor and can thus be extracted by comparing the distribution tensor at different time steps.

Quantifying Deformation: Strain

We can take any of these approaches and quantify the deformation using a tool called the strain tensor. Strain is defined in mechanics for the simple 1D case as the change in the length against the change in the original length. \[ e = \frac{\Delta L}{L} \] While this defines the 1D case well, it is difficult to apply such metrics to voxel, shape, and tensor data.

Strain Tensor

There are a number of different ways to calculate strain and the strain tensor, but the most applicable for general image based applications is called the infinitesimal strain tensor, because the element matches well to square pixels and cubic voxels.

# Base square element: a closed 5-vertex polygon of half-width 0.4,
# replicated at every node of a 4x4 grid of integer positions.
b.sq.ele <- 0.4 * data.frame(dx = c(-1, 1, 1, -1, -1), dy = c(-1, -1, 1, 1, -1))
s.grid <- expand.grid(x = 0:3, y = 0:3)
sq.eles <- ddply(cbind(s.grid, id = seq_len(nrow(s.grid))), .(x, y), function(in.pt) {
  # one polygon (5 rows) per grid node, offset by the node position
  data.frame(x = in.pt[1, "x"] + b.sq.ele$dx,
             y = in.pt[1, "y"] + b.sq.ele$dy,
             id = in.pt[1, "id"])
})
# Draw the undeformed grid of square elements.
ggplot(sq.eles, aes(x, y)) +
  geom_polygon(aes(group = id)) +
  coord_equal() +
  theme_bw(25)
plot of chunk unnamed-chunk-46


A given strain can then be applied and we can quantify the effects by examining the change in the small element.

# Deformed elements: each element at grid node (x, y) has its polygon
# shrunk by 1/(x+1) horizontally and 1/(y+1) vertically, and the
# infinitesimal-strain components are computed per element.
s.grid<-expand.grid(x=c(0:3),y=c(0:3))
rh.eles<-ddply(cbind(s.grid,id=1:nrow(s.grid)),.(x,y),function(in.pt) {
  out.df<-data.frame(x=in.pt[1,"x"]+b.sq.ele$dx/(in.pt[1,"x"]+1),
             y=in.pt[1,"y"]+b.sq.ele$dy/(in.pt[1,"y"]+1),
             id=in.pt[1,"id"],
             x.rel.sz=1,  # reference (undeformed) size in x
             y.rel.sz=1,  # reference (undeformed) size in y
             # NOTE(review): x.sz scales with the y index and y.sz with the
             # x index, while the polygon shrinks dx by (x+1) and dy by
             # (y+1) -- confirm this cross-over is intentional
             x.sz=1/(in.pt[1,"y"]+1),
             y.sz=1/(in.pt[1,"x"]+1))
  # normal strains: relative change in size along each axis
  out.df$e11<-with(out.df,(x.sz-x.rel.sz)/x.rel.sz)
  out.df$e22<-with(out.df,(y.sz-y.rel.sz)/y.rel.sz)
  # off-diagonal term; NOTE(review): built from the two normal changes,
  # not from displacement gradients -- confirm intended approximation
  out.df$e12<-0.5*with(out.df,(x.sz-x.rel.sz)/y.rel.sz+(y.sz-y.rel.sz)/x.rel.sz)
  
  # volumetric strain (trace) and deviatoric magnitude
  out.df$vol<-with(out.df,e11+e22)
  out.df$dva<-with(out.df,sqrt((e11-vol/2)^2+(e22-vol/2)^2))
  out.df
})
# Draw the deformed elements.
ggplot(rh.eles,aes(x,y))+
  geom_polygon(aes(group=id))+
  coord_equal()+
  theme_bw(25)
plot of chunk unnamed-chunk-47

Types of Strain

We categorize the types of strain into two main categories:

\[ \underbrace{\mathbf{E}}_{\textrm{Total Strain}} = \underbrace{\varepsilon_M \mathbf{I_3}}_{\textrm{Volumetric}} + \underbrace{\mathbf{E}^\prime}_{\textrm{Deviatoric}} \]

Volumetric / Dilational

The isotropic change in size or scale of the object.

Deviatoric

The change in the proportions of the object (similar to anisotropy) independent of the final scale


# Colour each deformed element by its volumetric strain (trace of E).
ggplot(rh.eles, aes(x, y)) +
  geom_polygon(aes(group = id, fill = vol), color = "black") +
  scale_fill_gradient2() +
  labs(fill = "Volumetric\nStrain") +
  coord_equal() +
  theme_bw(25)
plot of chunk unnamed-chunk-48

# Colour each deformed element by its deviatoric strain magnitude.
ggplot(rh.eles, aes(x, y)) +
  geom_polygon(aes(group = id, fill = dva), color = "black") +
  scale_fill_gradient2() +
  labs(fill = "Deviatoric\nStrain") +
  coord_equal() +
  theme_bw(25)
plot of chunk unnamed-chunk-49

Strain Tensor

This can then be expressed more concretely in the form of a strain tensor which can be broken down into volumetric and deviatoric strain \[ \mathbf{E} = \begin{bmatrix} \varepsilon_{11} & \varepsilon_{12} & \varepsilon_{13} \\ \varepsilon_{21} & \varepsilon_{22} & \varepsilon_{23} \\ \varepsilon_{31} & \varepsilon_{32} & \varepsilon_{33} \end{bmatrix} \]

Thickness - Lung Tissue

__ Data provided by Goran Lovric and Rajmund Mokso __ — lung tissue.

Curvature - Metal Systems

__ Data provided by Julie Fife __

Two Point Correlation - Volcanic Rock

__ Data provided by Mattia Pistone and Julie Fife __ The air phase changes from small very anisotropic bubbles to one large connected pore network. The same tools cannot be used to quantify those systems. Furthermore there are motion artifacts which are difficult to correct.


We can utilize the two point correlation function of the material to characterize the shape generically for each time step and then compare.